var runtime.gcController
148 uses
runtime (current package)
arena.go#L787: gcController.heapInUse.add(-int64(s.npages * pageSize))
arena.go#L793: gcController.totalFree.Add(int64(s.npages * pageSize))
arena.go#L807: gcController.update(-int64(s.npages*pageSize), 0)
arena.go#L948: sysMap(unsafe.Pointer(base), userArenaChunkBytes, &gcController.heapReleased)
arena.go#L957: gcController.heapInUse.add(int64(userArenaChunkBytes))
arena.go#L958: gcController.heapReleased.add(-int64(userArenaChunkBytes))
arena.go#L970: gcController.totalAlloc.Add(int64(userArenaChunkBytes))
arena.go#L973: gcController.update(int64(userArenaChunkBytes), 0)
malloc.go#L602: gcController.memoryLimit.Store(maxInt64)
malloc.go#L632: v = h.arena.alloc(n, heapArenaBytes, &gcController.heapReleased)
mcache.go#L175: gcController.totalAlloc.Add(bytesAllocated)
mcache.go#L212: gcController.update(int64(s.npages*pageSize)-int64(usedBytes), int64(c.scanAlloc))
mcache.go#L246: gcController.totalAlloc.Add(int64(npages * pageSize))
mcache.go#L249: gcController.update(int64(s.npages*pageSize), 0)
mcache.go#L279: gcController.totalAlloc.Add(slotsUsed * int64(s.elemsize))
mcache.go#L306: gcController.update(dHeapLive, scanAlloc)
mem.go#L51: gcController.mappedReady.Add(int64(n))
mem.go#L61: gcController.mappedReady.Add(-int64(n))
mem.go#L76: gcController.mappedReady.Add(int64(prepared))
mem.go#L115: gcController.mappedReady.Add(-int64(n))
mem.go#L129: gcController.mappedReady.Add(-int64(n))
metrics.go#L282: out.scalar = uint64(gcController.memoryLimit.Load())
metrics.go#L288: out.scalar = uint64(gcController.gcPercent.Load())
metrics.go#L295: out.scalar = gcController.heapMarked
metrics.go#L644: a.heapGoal = gcController.heapGoal()
metrics.go#L685: a.heapScan = gcController.heapScan.Load()
metrics.go#L686: a.stackScan = uint64(gcController.lastStackScan.Load())
metrics.go#L687: a.globalsScan = gcController.globalsScan.Load()
mgc.go#L184: gcController.init(readGOGC(), readGOMEMLIMIT())
mgc.go#L292: delta := now - gcController.markStartTime
mgc.go#L300: return float64(selfTime)/float64(delta) > 1.2*gcController.fractionalUtilizationGoal
mgc.go#L579: trigger, _ := gcController.trigger()
mgc.go#L580: return gcController.heapLive.Load() >= trigger
mgc.go#L582: if gcController.gcPercent.Load() < 0 {
mgc.go#L677: work.heap0 = gcController.heapLive.Load()
mgc.go#L698: gcController.startCycle(now, int(gomaxprocs), trigger)
mgc.go#L935: gcController.endCycle(now, int(gomaxprocs), work.userForced)
mgc.go#L947: work.heap1 = gcController.heapLive.Load()
mgc.go#L1008: memstats.lastHeapInUse = gcController.heapInUse.load()
mgc.go#L1152: gcController.assistTime.Load(),
mgc.go#L1153: gcController.dedicatedMarkTime.Load() + gcController.fractionalMarkTime.Load(),
mgc.go#L1154: gcController.idleMarkTime.Load(),
mgc.go#L1167: gcController.lastHeapGoal>>20, " MB goal, ",
mgc.go#L1168: gcController.lastStackScan.Load()>>20, " MB stacks, ",
mgc.go#L1169: gcController.globalsScan.Load()>>20, " MB globals, ",
mgc.go#L1188: if gcController.heapGoal() > minHeapForMetadataHugePages {
mgc.go#L1395: gcController.markWorkerStop(pp.gcMarkWorkerMode, duration)
mgc.go#L1537: gcController.resetLive(work.bytesMarked)
mgc.go#L1626: work.initialHeapLive = gcController.heapLive.Load()
mgcmark.go#L169: workCounter = &gcController.globalsScanWork
mgcmark.go#L175: workCounter = &gcController.globalsScanWork
mgcmark.go#L197: workCounter = &gcController.stackScanWork
mgcmark.go#L430: assistWorkPerByte := gcController.assistWorkPerByte.Load()
mgcmark.go#L431: assistBytesPerWork := gcController.assistBytesPerWork.Load()
mgcmark.go#L445: bgScanCredit := gcController.bgScanCredit.Load()
mgcmark.go#L455: gcController.bgScanCredit.Add(-stolen)
mgcmark.go#L578: assistBytesPerWork := gcController.assistBytesPerWork.Load()
mgcmark.go#L605: gcController.assistTime.Add(pp.gcAssistTime)
mgcmark.go#L643: if gcController.bgScanCredit.Load() > 0 {
mgcmark.go#L672: gcController.bgScanCredit.Add(scanWork)
mgcmark.go#L676: assistBytesPerWork := gcController.assistBytesPerWork.Load()
mgcmark.go#L710: assistWorkPerByte := gcController.assistWorkPerByte.Load()
mgcmark.go#L712: gcController.bgScanCredit.Add(scanWork)
mgcmark.go#L1109: gcController.heapScanWork.Add(gcw.heapScanWork)
mgcmark.go#L1129: gcController.heapScanWork.Add(gcw.heapScanWork)
mgcmark.go#L1196: gcController.heapScanWork.Add(gcw.heapScanWork)
mgcpacer.go#L88: var gcController gcControllerState
mgcpacer.go#L460: " (scan ", gcController.heapScan.Load()>>20, " MB in ",
mgcpacer.go#L601: gcController.lastHeapGoal = c.heapGoal()
mgcpacer.go#L859: live := gcController.heapLive.Add(dHeapLive)
mgcpacer.go#L869: gcController.heapScan.Add(dHeapScan)
mgcpacer.go#L1263: out = gcController.setGCPercent(in)
mgcpacer.go#L1310: out = gcController.setMemoryLimit(in)
mgcpacer.go#L1428: gcController.commit(isSweepDone())
mgcpacer.go#L1432: gcController.revise()
mgcpacer.go#L1441: trigger, heapGoal := gcController.trigger()
mgcpacer.go#L1443: gcPaceScavenger(gcController.memoryLimit.Load(), heapGoal, gcController.lastHeapGoal)
mgcscavenge.go#L151: return gcController.heapInUse.load() + gcController.heapFree.load()
mgcscavenge.go#L179: mappedReady := gcController.mappedReady.Load()
mgcscavenge.go#L408: gcController.mappedReady.Load() <= scavenge.memoryLimitGoal.Load()
mgcscavenge.go#L706: gcController.heapReleased.load()>>10, " KiB now, ",
mgcscavenge.go#L707: (gcController.heapInUse.load()*100)/heapRetained(), "% util",
mgcscavenge.go#L789: gcController.heapReleased.add(nbytes)
mgcscavenge.go#L790: gcController.heapFree.add(-nbytes)
mgcsweep.go#L179: live := gcController.heapLive.Load()
mgcsweep.go#L749: gcController.totalFree.Add(int64(nfreed) * int64(s.elemsize))
mgcsweep.go#L802: gcController.totalFree.Add(int64(size))
mgcsweep.go#L894: live := gcController.heapLive.Load()
mgcsweep.go#L958: heapLiveBasis := gcController.heapLive.Load()
mgcwork.go#L144: gcController.enlistWorker()
mgcwork.go#L193: gcController.enlistWorker()
mgcwork.go#L279: gcController.heapScanWork.Add(w.heapScanWork)
mgcwork.go#L304: gcController.enlistWorker()
mheap.go#L1281: if limit := gcController.memoryLimit.Load(); !gcCPULimiter.limiting() {
mheap.go#L1284: inuse := gcController.mappedReady.Load()
mheap.go#L1352: gcController.heapReleased.add(-int64(scav))
mheap.go#L1355: gcController.heapFree.add(-int64(nbytes - scav))
mheap.go#L1357: gcController.heapInUse.add(int64(nbytes))
mheap.go#L1485: inUse := gcController.heapFree.load() + gcController.heapReleased.load() + gcController.heapInUse.load()
mheap.go#L1502: sysMap(unsafe.Pointer(h.curArena.base), size, &gcController.heapReleased)
mheap.go#L1533: sysMap(unsafe.Pointer(v), nBase-v, &gcController.heapReleased)
mheap.go#L1620: gcController.heapFree.add(int64(nbytes))
mheap.go#L1622: gcController.heapInUse.add(-int64(nbytes))
mstats.go#L437: totalMapped := gcController.heapInUse.load() + gcController.heapFree.load() + gcController.heapReleased.load() +
mstats.go#L442: heapGoal := gcController.heapGoal()
mstats.go#L458: if gcController.heapInUse.load() != uint64(consStats.inHeap) {
mstats.go#L459: print("runtime: heapInUse=", gcController.heapInUse.load(), "\n")
mstats.go#L463: if gcController.heapReleased.load() != uint64(consStats.released) {
mstats.go#L464: print("runtime: heapReleased=", gcController.heapReleased.load(), "\n")
mstats.go#L468: heapRetained := gcController.heapInUse.load() + gcController.heapFree.load()
mstats.go#L475: if gcController.totalAlloc.Load() != totalAlloc {
mstats.go#L476: print("runtime: totalAlloc=", gcController.totalAlloc.Load(), "\n")
mstats.go#L480: if gcController.totalFree.Load() != totalFree {
mstats.go#L481: print("runtime: totalFree=", gcController.totalFree.Load(), "\n")
mstats.go#L488: if gcController.mappedReady.Load() != totalMapped-uint64(consStats.released) {
mstats.go#L489: print("runtime: mappedReady=", gcController.mappedReady.Load(), "\n")
mstats.go#L504: stats.HeapSys = gcController.heapInUse.load() + gcController.heapFree.load() + gcController.heapReleased.load()
mstats.go#L521: stats.HeapIdle = gcController.heapFree.load() + gcController.heapReleased.load()
mstats.go#L522: stats.HeapInuse = gcController.heapInUse.load()
mstats.go#L523: stats.HeapReleased = gcController.heapReleased.load()
mstats.go#L933: markAssistCpu = gcController.assistTime.Load()
mstats.go#L934: markDedicatedCpu = gcController.dedicatedMarkTime.Load()
mstats.go#L935: markFractionalCpu = gcController.fractionalMarkTime.Load()
mstats.go#L936: markIdleCpu = gcController.idleMarkTime.Load()
proc.go#L2926: gp, tnow := gcController.findRunnableGCWorker(pp, now)
proc.go#L3021: if gcBlackenEnabled != 0 && gcMarkWorkAvailable(pp) && gcController.addIdleMarkWorker() {
proc.go#L3032: gcController.removeIdleMarkWorker()
proc.go#L3380: if atomic.Load(&gcBlackenEnabled) == 0 || !gcController.needIdleMarkWorker() {
proc.go#L3412: if gcBlackenEnabled == 0 || !gcController.addIdleMarkWorker() {
proc.go#L3422: gcController.removeIdleMarkWorker()
proc.go#L3866: gcController.addScannableStack(pp, -int64(gp.stack.hi-gp.stack.lo))
proc.go#L3888: assistWorkPerByte := gcController.assistWorkPerByte.Load()
proc.go#L3890: gcController.bgScanCredit.Add(scanCredit)
proc.go#L4559: gcController.addScannableStack(pp, int64(newg.stack.hi-newg.stack.lo))
stack.go#L871: gcController.addScannableStack(getg().m.p.ptr(), int64(newsize)-int64(old.hi-old.lo))
symtab.go#L464: gcController.addGlobals(int64(scanDataSize + scanBSSSize))
trace.go#L1693: heapGoal := gcController.heapGoal()
|
The pages are generated with Golds v0.6.7. (GOOS=linux GOARCH=amd64)
Golds is a Go 101 project developed by Tapir Liu.
PRs and bug reports are welcome and can be submitted to the issue list.
Please follow @Go100and1 (reachable via the QR code on the left) to get the latest news about Golds.